The HURDAT dataset is provided by the NOAA, containing data in six-hour time intervals for all hurricanes and tropical storms since 1851. For the purposes of this analysis, we will look at all hurricanes in the North Atlantic Ocean, affecting North America and the Caribbean.
The data is provided as a text file, containing a multidimensional array for storms and individual datapoints for each time interval. The purpose of this notebook is to parse this raw data into cleanly formatted and labeled data that can be used for data exploration and analysis.
This data cleaning will include creating features for the tropical storm/hurricane as a whole (e.g. did it make landfall/what was the max category/duration of system).
Data Description: https://www.nhc.noaa.gov/data/hurdat/hurdat2-format-atlantic.pdf
import os
import re
import pandas as pd
import numpy as np
import datetime
import json
import folium
import seaborn as sns
import chart_studio.plotly as py
import plotly.graph_objs as go
from urllib.request import urlretrieve
from tqdm import tqdm_notebook as tqdm
from plotly.tools import FigureFactory as FF
from plotly import offline
from plotly.offline import iplot
from plotly.offline import init_notebook_mode, iplot
init_notebook_mode()
Hurricane data from the NOAA comes in a pretty raw format. We will first:
def read_hurdat(url, local_fname, location):
    """
    Load and parse the HURDAT2 dataset from the NOAA website.

    Downloads the raw text file to `local_fname` on first use (cached
    thereafter), then groups it into per-storm records.

    Parameters
    ----------
    url : str
        Source URL for the HURDAT2 text file.
    local_fname : str
        Local path used to cache the downloaded file.
    location : str
        Basin prefix that marks a storm header line (e.g. "AL" for Atlantic).

    Returns
    -------
    list of (header, reports) tuples, where `header` is the storm header
    line and `reports` is the list of its six-hourly observation lines.
    """
    if not os.path.exists(local_fname):
        urlretrieve(url, local_fname)
    records = []
    reports = None  # guards against data lines appearing before any header
    with open(local_fname, 'r') as f:
        for line in f:
            line = line.strip()
            if line.startswith(location):
                # New storm: start a fresh report list shared with the record
                reports = []
                records.append((line, reports))
            elif reports is not None:
                reports.append(line)
    return records
def convert_lat_lon(value, col):
    """
    Convert a HURDAT lat/lon string to a signed decimal degree value.

    Lat/lon is encoded as a number plus a hemisphere letter (E/W/N/S),
    e.g. "49.7W". West and South become negative, matching map conventions.

    Parameters
    ----------
    value : str
        Coordinate string such as "12.2N" or "49.7W".
    col : str
        Either 'lat' or 'lon'; selects which hemisphere letters apply.

    Raises
    ------
    ValueError
        If `col` is not 'lat' or 'lon'.
    """
    if col == 'lon':
        amount = float(re.sub('[EW]', '', value))
        return -amount if 'W' in value else amount
    if col == 'lat':
        amount = float(re.sub('[NS]', '', value))
        return -amount if 'S' in value else amount
    raise ValueError(f"col must be 'lat' or 'lon', got {col!r}")
def hurr_category(max_wind):
    """
    Classify a storm observation on the Saffir-Simpson scale.

    HURDAT2 reports maximum sustained winds in KNOTS (see the NOAA format
    document), so the thresholds below are the official knot cut-offs:
    cat 1: 64-82 kt, cat 2: 83-95 kt, cat 3: 96-112 kt, cat 4: 113-136 kt,
    cat 5: >= 137 kt. Anything below hurricane strength is labeled 'TS'
    (this bucket also covers tropical depressions).

    Parameters
    ----------
    max_wind : int or str
        Maximum sustained wind speed in knots.

    Returns
    -------
    'TS' or an int category 1-5.
    """
    max_wind = int(max_wind)
    if max_wind <= 63:
        cat = 'TS'
    elif max_wind <= 82:
        cat = 1
    elif max_wind <= 95:
        cat = 2
    elif max_wind <= 112:
        cat = 3
    elif max_wind <= 136:
        cat = 4
    else:
        cat = 5
    return cat
# HURDAT2 Atlantic dataset, 1851-2017 (cached locally after first download)
url = "https://www.nhc.noaa.gov/data/hurdat/hurdat2-1851-2017-050118.txt"
local_fname = "../data/hurdat2.txt"
records = read_hurdat(url, local_fname, "AL") # AL for atlantic hurricanes
Let's look at what a raw record looks like (Hurricane MARIA):
records[-5]
('AL152017, MARIA, 68,',
['20170916, 1200, , TD, 12.2N, 49.7W, 30, 1006, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,',
'20170916, 1800, , TS, 12.2N, 51.7W, 40, 1004, 40, 0, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0,',
'20170917, 0000, , TS, 12.4N, 53.1W, 45, 1002, 40, 30, 0, 40, 0, 0, 0, 0, 0, 0, 0, 0,',
'20170917, 0600, , TS, 12.8N, 54.4W, 55, 994, 50, 40, 0, 50, 20, 20, 0, 20, 0, 0, 0, 0,',
'20170917, 1200, , TS, 13.3N, 55.7W, 60, 990, 60, 40, 30, 50, 30, 20, 0, 20, 0, 0, 0, 0,',
'20170917, 1800, , HU, 13.6N, 57.0W, 65, 986, 70, 60, 40, 60, 30, 20, 0, 20, 15, 0, 0, 10,',
'20170918, 0000, , HU, 14.0N, 58.0W, 75, 979, 90, 60, 40, 70, 30, 20, 20, 20, 15, 10, 10, 10,',
'20170918, 0600, , HU, 14.3N, 59.0W, 80, 977, 90, 60, 40, 70, 30, 20, 20, 30, 15, 10, 10, 10,',
'20170918, 1200, , HU, 14.5N, 59.7W, 100, 967, 110, 90, 70, 90, 30, 30, 30, 30, 15, 15, 15, 15,',
'20170918, 1800, , HU, 14.9N, 60.4W, 110, 956, 110, 90, 80, 90, 30, 30, 30, 30, 20, 15, 15, 20,',
'20170919, 0000, , HU, 15.3N, 61.1W, 145, 924, 110, 110, 80, 90, 40, 40, 30, 40, 25, 25, 20, 25,',
'20170919, 0115, L, HU, 15.4N, 61.3W, 145, 922, 110, 110, 80, 90, 40, 40, 30, 40, 25, 25, 20, 25,',
'20170919, 0600, , HU, 15.7N, 61.9W, 135, 940, 110, 110, 80, 100, 50, 50, 30, 50, 25, 25, 20, 25,',
'20170919, 1200, , HU, 16.1N, 62.7W, 140, 931, 120, 110, 80, 100, 80, 50, 40, 80, 25, 25, 20, 30,',
'20170919, 1800, , HU, 16.6N, 63.5W, 145, 920, 120, 110, 80, 100, 80, 60, 60, 80, 30, 30, 30, 30,',
'20170920, 0000, , HU, 17.0N, 64.3W, 150, 909, 130, 110, 100, 110, 80, 70, 60, 80, 50, 45, 35, 40,',
'20170920, 0300, I, HU, 17.3N, 64.7W, 150, 908, 130, 110, 100, 110, 80, 70, 60, 80, 50, 45, 35, 40,',
'20170920, 0600, , HU, 17.6N, 65.1W, 140, 913, 130, 110, 100, 110, 80, 70, 60, 80, 50, 45, 35, 40,',
'20170920, 1015, L, HU, 18.0N, 65.9W, 135, 920, 130, 110, 100, 110, 80, 70, 60, 80, 50, 45, 35, 40,',
'20170920, 1200, , HU, 18.2N, 66.2W, 115, 935, 130, 110, 100, 110, 80, 70, 60, 80, 50, 45, 35, 40,',
'20170920, 1800, , HU, 18.6N, 67.0W, 95, 959, 130, 110, 100, 110, 70, 70, 60, 70, 50, 45, 35, 40,',
'20170921, 0000, , HU, 19.0N, 67.6W, 95, 958, 130, 110, 110, 110, 70, 70, 60, 70, 50, 30, 40, 40,',
'20170921, 0600, , HU, 19.4N, 68.2W, 100, 959, 130, 110, 110, 110, 70, 70, 60, 70, 50, 30, 40, 40,',
'20170921, 1200, , HU, 19.9N, 68.8W, 100, 959, 130, 110, 110, 110, 80, 80, 60, 80, 50, 50, 40, 50,',
'20170921, 1800, , HU, 20.5N, 69.5W, 105, 960, 140, 130, 110, 110, 90, 80, 60, 80, 60, 50, 40, 50,',
'20170922, 0000, , HU, 20.8N, 70.0W, 110, 953, 140, 120, 100, 120, 90, 80, 60, 80, 60, 50, 40, 50,',
'20170922, 0600, , HU, 21.2N, 70.5W, 110, 959, 140, 120, 100, 130, 90, 80, 60, 80, 60, 50, 40, 50,',
'20170922, 1200, , HU, 21.9N, 70.9W, 110, 958, 140, 120, 100, 130, 90, 80, 60, 90, 60, 40, 40, 40,',
'20170922, 1800, , HU, 22.8N, 71.2W, 110, 959, 140, 130, 100, 120, 80, 60, 60, 70, 50, 40, 35, 30,',
'20170923, 0000, , HU, 23.7N, 71.6W, 105, 953, 160, 130, 100, 120, 80, 60, 60, 70, 50, 40, 35, 30,',
'20170923, 0600, , HU, 24.4N, 71.9W, 100, 952, 170, 170, 100, 120, 90, 90, 60, 70, 50, 40, 35, 30,',
'20170923, 1200, , HU, 25.1N, 72.1W, 100, 952, 170, 170, 100, 150, 90, 90, 60, 70, 50, 40, 35, 40,',
'20170923, 1800, , HU, 25.9N, 72.3W, 100, 952, 210, 200, 100, 150, 90, 80, 60, 80, 50, 40, 35, 45,',
'20170924, 0000, , HU, 26.6N, 72.4W, 100, 945, 210, 200, 100, 150, 100, 80, 60, 80, 50, 40, 35, 45,',
'20170924, 0600, , HU, 27.5N, 72.6W, 95, 942, 210, 200, 120, 160, 100, 80, 60, 80, 50, 40, 35, 45,',
'20170924, 1200, , HU, 28.4N, 72.8W, 95, 947, 210, 200, 120, 160, 100, 80, 60, 80, 50, 40, 35, 45,',
'20170924, 1800, , HU, 29.1N, 72.9W, 90, 943, 200, 200, 140, 160, 100, 100, 70, 90, 50, 40, 35, 45,',
'20170925, 0000, , HU, 29.7N, 72.9W, 85, 947, 200, 200, 140, 160, 100, 100, 70, 90, 50, 40, 35, 45,',
'20170925, 0600, , HU, 30.3N, 72.9W, 75, 954, 200, 200, 160, 170, 100, 100, 90, 90, 60, 60, 0, 0,',
'20170925, 1200, , HU, 30.8N, 73.0W, 70, 961, 200, 200, 160, 160, 120, 120, 90, 90, 80, 80, 0, 0,',
'20170925, 1800, , HU, 31.4N, 73.1W, 70, 966, 180, 180, 160, 160, 120, 120, 90, 90, 90, 90, 0, 0,',
'20170926, 0000, , HU, 32.0N, 73.1W, 70, 966, 180, 180, 160, 160, 120, 120, 90, 90, 90, 90, 0, 0,',
'20170926, 0600, , HU, 32.6N, 73.1W, 65, 970, 200, 210, 160, 160, 120, 120, 90, 90, 90, 90, 0, 0,',
'20170926, 1200, , HU, 33.3N, 73.1W, 65, 970, 200, 210, 160, 160, 120, 120, 90, 90, 90, 90, 0, 0,',
'20170926, 1800, , HU, 33.9N, 73.1W, 65, 975, 200, 200, 160, 160, 120, 120, 80, 100, 90, 90, 0, 90,',
'20170927, 0000, , HU, 34.4N, 73.0W, 65, 975, 200, 200, 160, 160, 120, 120, 80, 100, 90, 90, 0, 90,',
'20170927, 0600, , HU, 34.9N, 72.9W, 65, 976, 200, 200, 160, 150, 120, 120, 0, 100, 90, 0, 0, 90,',
'20170927, 1200, , HU, 35.4N, 72.8W, 65, 977, 200, 200, 160, 150, 120, 120, 0, 100, 90, 0, 0, 90,',
'20170927, 1800, , HU, 36.0N, 72.6W, 65, 979, 200, 200, 160, 150, 120, 120, 0, 100, 90, 0, 0, 90,',
'20170928, 0000, , HU, 36.6N, 72.2W, 65, 979, 200, 200, 160, 150, 120, 120, 0, 100, 90, 0, 0, 90,',
'20170928, 0600, , TS, 36.7N, 71.3W, 60, 982, 180, 210, 160, 150, 90, 90, 70, 80, 0, 0, 0, 0,',
'20170928, 1200, , TS, 36.8N, 70.0W, 60, 982, 180, 210, 160, 150, 90, 90, 70, 80, 0, 0, 0, 0,',
'20170928, 1800, , TS, 36.8N, 68.6W, 55, 985, 180, 210, 160, 150, 90, 90, 70, 80, 0, 0, 0, 0,',
'20170929, 0000, , TS, 36.9N, 66.8W, 55, 985, 180, 200, 160, 150, 90, 90, 70, 80, 0, 0, 0, 0,',
'20170929, 0600, , TS, 37.0N, 64.6W, 50, 987, 100, 210, 220, 180, 0, 90, 100, 0, 0, 0, 0, 0,',
'20170929, 1200, , TS, 37.0N, 62.0W, 50, 988, 100, 210, 220, 180, 0, 90, 100, 0, 0, 0, 0, 0,',
'20170929, 1800, , TS, 37.4N, 59.0W, 50, 988, 100, 220, 220, 180, 0, 100, 100, 0, 0, 0, 0, 0,',
'20170930, 0000, , TS, 38.1N, 55.6W, 50, 988, 100, 220, 220, 180, 0, 100, 100, 0, 0, 0, 0, 0,',
'20170930, 0600, , TS, 39.1N, 52.2W, 50, 988, 110, 200, 200, 180, 0, 100, 100, 0, 0, 0, 0, 0,',
'20170930, 1200, , TS, 40.0N, 48.8W, 50, 988, 110, 200, 200, 180, 0, 100, 100, 0, 0, 0, 0, 0,',
'20170930, 1800, , EX, 41.2N, 45.6W, 45, 991, 110, 200, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171001, 0000, , EX, 42.2N, 42.6W, 45, 994, 110, 200, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171001, 0600, , EX, 43.4N, 39.4W, 45, 996, 110, 200, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171001, 1200, , EX, 44.9N, 35.5W, 45, 999, 110, 200, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171001, 1800, , EX, 46.5N, 31.0W, 45, 1003, 110, 200, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171002, 0000, , EX, 47.5N, 26.5W, 40, 1005, 0, 0, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171002, 0600, , EX, 48.0N, 22.0W, 40, 1012, 0, 0, 200, 150, 0, 0, 0, 0, 0, 0, 0, 0,',
'20171002, 1200, , EX, 48.0N, 17.0W, 30, 1016, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,'])
From above, it is evident that the data is not very structured. We will parse each record into a nicely formatted dictionary for easier analysis.
# Parse each raw (header, reports) record into a dict keyed by storm ID,
# with one cleaned dict per six-hourly observation.
hurricanes = {}
for header, reports in records:
    fields = header.split(',')
    storm_id = fields[0]
    storm_name = fields[1].strip()
    # add to dict
    storm = hurricanes.setdefault(storm_id, {'name': storm_name})
    storm.setdefault('datapoints', [])
    for report in reports:
        values = [field.strip() for field in report.split(',')]
        storm['datapoints'].append({
            'record_date': values[0],
            'time': values[1],
            'record_identifier': values[2],  # L = landfall, I = peak intensity
            'storm_status': values[3],
            'lat': convert_lat_lon(values[4], 'lat'),
            'lon': convert_lat_lon(values[5], 'lon'),
            'max_wind': values[6],
            'min_pressure': values[7],
            'category': hurr_category(values[6]),
        })
Let's see what a parsed record looks like now (Hurricane RINA):
hurricanes['AL192017']
{'name': 'RINA',
'datapoints': [{'record_date': '20171104',
'time': '1200',
'record_identifier': '',
'storm_status': 'LO',
'lat': 25.5,
'lon': -52.3,
'max_wind': '25',
'min_pressure': '1013',
'category': 'TS'},
{'record_date': '20171104',
'time': '1800',
'record_identifier': '',
'storm_status': 'LO',
'lat': 26.5,
'lon': -52.1,
'max_wind': '25',
'min_pressure': '1013',
'category': 'TS'},
{'record_date': '20171105',
'time': '0000',
'record_identifier': '',
'storm_status': 'LO',
'lat': 27.5,
'lon': -52.0,
'max_wind': '25',
'min_pressure': '1013',
'category': 'TS'},
{'record_date': '20171105',
'time': '0600',
'record_identifier': '',
'storm_status': 'LO',
'lat': 28.4,
'lon': -52.0,
'max_wind': '25',
'min_pressure': '1013',
'category': 'TS'},
{'record_date': '20171105',
'time': '1200',
'record_identifier': '',
'storm_status': 'LO',
'lat': 29.0,
'lon': -51.9,
'max_wind': '25',
'min_pressure': '1013',
'category': 'TS'},
{'record_date': '20171105',
'time': '1800',
'record_identifier': '',
'storm_status': 'TD',
'lat': 29.2,
'lon': -51.7,
'max_wind': '30',
'min_pressure': '1012',
'category': 'TS'},
{'record_date': '20171106',
'time': '0000',
'record_identifier': '',
'storm_status': 'TD',
'lat': 29.1,
'lon': -51.2,
'max_wind': '30',
'min_pressure': '1011',
'category': 'TS'},
{'record_date': '20171106',
'time': '0600',
'record_identifier': '',
'storm_status': 'TD',
'lat': 29.0,
'lon': -50.7,
'max_wind': '30',
'min_pressure': '1010',
'category': 'TS'},
{'record_date': '20171106',
'time': '1200',
'record_identifier': '',
'storm_status': 'TD',
'lat': 29.1,
'lon': -50.4,
'max_wind': '30',
'min_pressure': '1010',
'category': 'TS'},
{'record_date': '20171106',
'time': '1800',
'record_identifier': '',
'storm_status': 'TD',
'lat': 29.4,
'lon': -50.2,
'max_wind': '30',
'min_pressure': '1009',
'category': 'TS'},
{'record_date': '20171107',
'time': '0000',
'record_identifier': '',
'storm_status': 'TS',
'lat': 30.0,
'lon': -50.0,
'max_wind': '35',
'min_pressure': '1008',
'category': 'TS'},
{'record_date': '20171107',
'time': '0600',
'record_identifier': '',
'storm_status': 'TS',
'lat': 30.8,
'lon': -49.9,
'max_wind': '40',
'min_pressure': '1006',
'category': 'TS'},
{'record_date': '20171107',
'time': '1200',
'record_identifier': '',
'storm_status': 'TS',
'lat': 31.8,
'lon': -49.5,
'max_wind': '40',
'min_pressure': '1004',
'category': 'TS'},
{'record_date': '20171107',
'time': '1800',
'record_identifier': '',
'storm_status': 'TS',
'lat': 33.0,
'lon': -49.1,
'max_wind': '40',
'min_pressure': '1002',
'category': 'TS'},
{'record_date': '20171108',
'time': '0000',
'record_identifier': '',
'storm_status': 'TS',
'lat': 34.6,
'lon': -48.7,
'max_wind': '45',
'min_pressure': '999',
'category': 'TS'},
{'record_date': '20171108',
'time': '0600',
'record_identifier': '',
'storm_status': 'TS',
'lat': 36.4,
'lon': -48.7,
'max_wind': '50',
'min_pressure': '996',
'category': 'TS'},
{'record_date': '20171108',
'time': '1200',
'record_identifier': '',
'storm_status': 'TS',
'lat': 38.3,
'lon': -48.8,
'max_wind': '45',
'min_pressure': '994',
'category': 'TS'},
{'record_date': '20171108',
'time': '1800',
'record_identifier': '',
'storm_status': 'TS',
'lat': 40.1,
'lon': -49.0,
'max_wind': '45',
'min_pressure': '992',
'category': 'TS'},
{'record_date': '20171109',
'time': '0000',
'record_identifier': '',
'storm_status': 'TS',
'lat': 41.8,
'lon': -48.8,
'max_wind': '45',
'min_pressure': '991',
'category': 'TS'},
{'record_date': '20171109',
'time': '0600',
'record_identifier': '',
'storm_status': 'LO',
'lat': 43.6,
'lon': -48.0,
'max_wind': '40',
'min_pressure': '993',
'category': 'TS'},
{'record_date': '20171109',
'time': '1200',
'record_identifier': '',
'storm_status': 'LO',
'lat': 45.5,
'lon': -47.0,
'max_wind': '40',
'min_pressure': '995',
'category': 'TS'}]}
Much better!
In order to test our hypotheses, we will need some additional data for each hurricane that isn't directly available at the moment, including:
def datapoint_datetime(datapoint):
    """
    Attach a parsed `dt` datetime field to a datapoint (used later to
    compute storm duration). Existing `dt` values are left untouched.
    """
    stamp = datapoint['record_date'] + datapoint['time']
    parsed = datetime.datetime.strptime(stamp, "%Y%m%d%H%M")
    datapoint.setdefault('dt', parsed)
    return datapoint
def is_hurricane(storm):
    """
    Did the storm ever make it to hurricane status ('HU')?
    """
    return any(dp['storm_status'] == 'HU' for dp in storm['datapoints'])
def landfall(storm):
    """
    Did the storm make landfall?

    A datapoint with record_identifier 'L' marks a landfall observation in
    HURDAT2; any such record means the storm hit land at least once.
    (The original used np.where on a scalar comparison — plain any() is
    equivalent and clearer.)
    """
    return any(dp['record_identifier'] == 'L' for dp in storm['datapoints'])
def storm_duration(storm):
    """
    How long did the storm last? Returns a timedelta spanning the first
    through last observation (requires `dt` fields; see datapoint_datetime).
    """
    timestamps = [dp['dt'] for dp in storm['datapoints']]
    return max(timestamps) - min(timestamps)
def get_year(storm):
    """
    Use the first record of the storm to determine the year, since a
    majority of the analysis is based on how hurricanes have changed
    over the years.
    """
    first_report = storm['datapoints'][0]
    return first_report['dt'].year
def category_min(storm, cat):
    """
    Did the storm ever reach at least Saffir-Simpson category `cat`?

    Non-numeric categories ('TS') are skipped — only int categories 1-5
    participate in the comparison. (Original docstring was a copy-paste
    of landfall(); also replaced scalar np.where with any().)
    """
    return any(
        isinstance(dp['category'], int) and dp['category'] >= cat
        for dp in storm['datapoints']
    )
# Apply functions to create features for every storm
for storm in tqdm(hurricanes.values(), total=len(hurricanes)):
    # Parse timestamps first; the duration/year features depend on them
    for dp in storm['datapoints']:
        datapoint_datetime(dp)
    storm['is_hurricane'] = is_hurricane(storm)
    storm['landfall'] = landfall(storm)
    storm['duration'] = storm_duration(storm)
    storm['year'] = get_year(storm)
    storm['cat2_or_more'] = category_min(storm, 2)
<ipython-input-20-892d97af01af>:3: TqdmDeprecationWarning: This function will be removed in tqdm==5.0.0 Please use `tqdm.notebook.tqdm` instead of `tqdm.tqdm_notebook` for key, value in tqdm(hurricanes.items(), total=len(hurricanes.items())):
Now, let's look at Hurricane Rina again to see what we added.
# Rina (2017) is AL192017 (AL172017 is Ophelia — see the parsed record above),
# and we want the number of observations, not the number of dict keys.
rina = hurricanes['AL192017']
print(f"Number of datapoints for Hurricane Rina: {len(rina['datapoints'])}")
Number of datapoints for Hurricane Rina: 7
Great, the data looks ready for some analysis to test our hypotheses! Let's save this cleaned up version before proceeding.
Using our cleaned HURDAT data, we will plot and compare the paths of hurricanes based on different time periods, to get a visual sense of how hurricanes have changed over time due to climate change.
Before diving into testing our hypotheses, we want to look at some maps to visualize how hurricanes have changed over the last 100 years. We will start by looking at the periods 1918-1927 vs 2008-2017.
Our hypothesis states that we expect:
def get_specific_years(hurricane_data, year_start, year_end):
    """
    Filter the hurricane dict (keyed by storm ID) down to storms whose
    `year` falls in [year_start, year_end) and return them as a list of
    storm dicts.
    """
    wanted_years = range(year_start, year_end)
    return [storm for storm in hurricane_data.values()
            if storm['year'] in wanted_years]
def create_path_map(storm_list, category_colors):
    """
    Given a list of storms to plot and the mapping of category strength
    to color, this function will create a folium plot of all the input
    storms and return the map as a folium object.
    """
    # Initialize map centered on the North Atlantic
    hurr_map = folium.Map(location=[30, -70], zoom_start=4)
    # Plot each storm's path segment-by-segment with info in popup
    for storm in storm_list:
        storm_name = storm['name']
        points = storm['datapoints']
        # Pairing each point with its successor naturally drops the last
        # point, which has no following segment to draw.
        for point, nxt in zip(points, points[1:]):
            day_points = [(point['lat'], point['lon']),
                          (nxt['lat'], nxt['lon'])]
            hurr_info = f"""
Storm: {storm_name}
Date: {point['record_date']}
Time: {point['time']}
Status: {point['storm_status']}
Category: {point['category']}
Max Wind: {point['max_wind']}mph
Min Pressure: {point['min_pressure']}millibars
"""
            # Segment color reflects the storm's strength at this point
            folium.PolyLine(day_points,
                            tooltip=storm_name,
                            popup=hurr_info,
                            color=category_colors[point['category']],
                            ).add_to(hurr_map)
    return hurr_map
# NOTE(review): the variable names say 1908-1917 and the narrative above
# says 1918-1927, but the active call actually pulls 1960-1969 — confirm
# which decade is intended before drawing any conclusions from this map.
# hurr_1908_1917 = get_specific_years(hurricanes, 1908, 1918)
hurr_1908_1917 = get_specific_years(hurricanes, 1960, 1970)
hurr_2008_2017 = get_specific_years(hurricanes, 2008, 2018)
# Define mapping for colors depending on category (darker red = stronger)
category_colors = {1: '#ffb3b3',
2: '#ff8080',
3: '#e60000',
4: '#ff0000',
5: '#580808',
'TS': '#ffe6e6'}
map_1908_1917 = create_path_map(hurr_1908_1917, category_colors)
map_2008_2017 = create_path_map(hurr_2008_2017, category_colors)
map_1908_1917